import tensorflow as tf
from tensorflow.keras.preprocessing.image import ImageDataGenerator
import random
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import time
# Confirm TensorFlow can see a GPU before starting (empty list => CPU-only run).
print(tf.config.list_physical_devices('GPU'))
[PhysicalDevice(name='/physical_device:GPU:0', device_type='GPU')]
# Dataset locations (one subdirectory per class, as expected by flow_from_directory).
train_dir = 'img/train'
test_dir = 'img/test'
valid_dir = 'img/valid'

BATCH_SIZE = 256
IMG_SHAPE = (224, 224)  # target (height, width) fed to every generator

# Training-only augmentation pipeline: random flips, zooms, rotations and
# shifts reduce overfitting; pixel values are rescaled from [0, 255] to [0, 1].
image_generator_train = ImageDataGenerator(rescale=1./255,
                                           horizontal_flip=True,
                                           zoom_range=0.3,
                                           rotation_range=60,
                                           width_shift_range=0.2,
                                           height_shift_range=0.2)

# class_mode='sparse' yields integer labels, matching a
# sparse_categorical_crossentropy loss downstream.
train_generator = image_generator_train.flow_from_directory(directory=train_dir,
                                                            batch_size=BATCH_SIZE,
                                                            shuffle=True,
                                                            target_size=IMG_SHAPE,
                                                            class_mode='sparse')

num_training_classes = train_generator.num_classes
# Fixed typo in variable name ("intances" -> "instances").
num_training_instances = len(train_generator.filenames)
num_training_instances, num_training_classes
Found 62093 images belonging to 129 classes.
(62093, 129)
# Validation images are only rescaled to [0, 1] -- no augmentation, so the
# metric reflects performance on unmodified data.
image_generator_val = ImageDataGenerator(rescale=1./255)
val_generator = image_generator_val.flow_from_directory(
    directory=valid_dir,
    target_size=IMG_SHAPE,
    batch_size=BATCH_SIZE,
    class_mode='sparse',
    shuffle=True,
)
Found 20698 images belonging to 129 classes.
# Test images are rescaled only; batch_size=1 so each sample is evaluated
# individually.
image_generator_test = ImageDataGenerator(rescale=1./255)
test_generator = image_generator_test.flow_from_directory(
    directory=test_dir,
    target_size=IMG_SHAPE,
    batch_size=1,
    class_mode='sparse',
    shuffle=True,
)
Found 20698 images belonging to 129 classes.
# Visualize a 5x5 grid of augmented versions of one randomly chosen image.
# BUG FIX: random.randint(0, BATCH_SIZE) is inclusive of BATCH_SIZE, so it
# could return an out-of-range index (valid indices are 0..BATCH_SIZE-1, and
# the last batch may be smaller still). randrange excludes the upper bound.
random_img = random.randrange(BATCH_SIZE)
columns = 5
rows = 5
fig = plt.figure(figsize=(40, 40))
for i in range(1, columns * rows + 1):
    fig.add_subplot(rows, columns, i)
    # Re-indexing the generator re-applies random augmentation, so each
    # subplot shows a different augmentation of the same source image.
    augmented_image = train_generator[0][0][random_img]
    plt.imshow(augmented_image)
plt.show()
# Build the convolutional base: ResNet50V2 pre-trained on ImageNet.
IMG_SIZE = (224, 224)
IMG_SHAPE = IMG_SIZE + (3,)  # append the RGB channel axis

base_model = tf.keras.applications.ResNet50V2(
    input_shape=IMG_SHAPE,
    include_top=False,   # drop the ImageNet classification head
    weights='imagenet',  # start from ImageNet pre-trained weights
)

# Freeze every layer of the base so only a new head will be trained.
base_model.trainable = False

# Inspect the frozen architecture.
base_model.summary()
Model: "resnet50v2"
__________________________________________________________________________________________________
Layer (type) Output Shape Param # Connected to
==================================================================================================
input_1 (InputLayer) [(None, 224, 224, 3) 0
__________________________________________________________________________________________________
conv1_pad (ZeroPadding2D) (None, 230, 230, 3) 0 input_1[0][0]
__________________________________________________________________________________________________
conv1_conv (Conv2D) (None, 112, 112, 64) 9472 conv1_pad[0][0]
__________________________________________________________________________________________________
pool1_pad (ZeroPadding2D) (None, 114, 114, 64) 0 conv1_conv[0][0]
__________________________________________________________________________________________________
pool1_pool (MaxPooling2D) (None, 56, 56, 64) 0 pool1_pad[0][0]
__________________________________________________________________________________________________
conv2_block1_preact_bn (BatchNo (None, 56, 56, 64) 256 pool1_pool[0][0]
__________________________________________________________________________________________________
conv2_block1_preact_relu (Activ (None, 56, 56, 64) 0 conv2_block1_preact_bn[0][0]
__________________________________________________________________________________________________
conv2_block1_1_conv (Conv2D) (None, 56, 56, 64) 4096 conv2_block1_preact_relu[0][0]
__________________________________________________________________________________________________
conv2_block1_1_bn (BatchNormali (None, 56, 56, 64) 256 conv2_block1_1_conv[0][0]
__________________________________________________________________________________________________
conv2_block1_1_relu (Activation (None, 56, 56, 64) 0 conv2_block1_1_bn[0][0]
__________________________________________________________________________________________________
conv2_block1_2_pad (ZeroPadding (None, 58, 58, 64) 0 conv2_block1_1_relu[0][0]
__________________________________________________________________________________________________
conv2_block1_2_conv (Conv2D) (None, 56, 56, 64) 36864 conv2_block1_2_pad[0][0]
__________________________________________________________________________________________________
conv2_block1_2_bn (BatchNormali (None, 56, 56, 64) 256 conv2_block1_2_conv[0][0]
__________________________________________________________________________________________________
conv2_block1_2_relu (Activation (None, 56, 56, 64) 0 conv2_block1_2_bn[0][0]
__________________________________________________________________________________________________
conv2_block1_0_conv (Conv2D) (None, 56, 56, 256) 16640 conv2_block1_preact_relu[0][0]
__________________________________________________________________________________________________
conv2_block1_3_conv (Conv2D) (None, 56, 56, 256) 16640 conv2_block1_2_relu[0][0]
__________________________________________________________________________________________________
conv2_block1_out (Add) (None, 56, 56, 256) 0 conv2_block1_0_conv[0][0]
conv2_block1_3_conv[0][0]
__________________________________________________________________________________________________
conv2_block2_preact_bn (BatchNo (None, 56, 56, 256) 1024 conv2_block1_out[0][0]
__________________________________________________________________________________________________
conv2_block2_preact_relu (Activ (None, 56, 56, 256) 0 conv2_block2_preact_bn[0][0]
__________________________________________________________________________________________________
conv2_block2_1_conv (Conv2D) (None, 56, 56, 64) 16384 conv2_block2_preact_relu[0][0]
__________________________________________________________________________________________________
conv2_block2_1_bn (BatchNormali (None, 56, 56, 64) 256 conv2_block2_1_conv[0][0]
__________________________________________________________________________________________________
conv2_block2_1_relu (Activation (None, 56, 56, 64) 0 conv2_block2_1_bn[0][0]
__________________________________________________________________________________________________
conv2_block2_2_pad (ZeroPadding (None, 58, 58, 64) 0 conv2_block2_1_relu[0][0]
__________________________________________________________________________________________________
conv2_block2_2_conv (Conv2D) (None, 56, 56, 64) 36864 conv2_block2_2_pad[0][0]
__________________________________________________________________________________________________
conv2_block2_2_bn (BatchNormali (None, 56, 56, 64) 256 conv2_block2_2_conv[0][0]
__________________________________________________________________________________________________
conv2_block2_2_relu (Activation (None, 56, 56, 64) 0 conv2_block2_2_bn[0][0]
__________________________________________________________________________________________________
conv2_block2_3_conv (Conv2D) (None, 56, 56, 256) 16640 conv2_block2_2_relu[0][0]
__________________________________________________________________________________________________
conv2_block2_out (Add) (None, 56, 56, 256) 0 conv2_block1_out[0][0]
conv2_block2_3_conv[0][0]
__________________________________________________________________________________________________
conv2_block3_preact_bn (BatchNo (None, 56, 56, 256) 1024 conv2_block2_out[0][0]
__________________________________________________________________________________________________
conv2_block3_preact_relu (Activ (None, 56, 56, 256) 0 conv2_block3_preact_bn[0][0]
__________________________________________________________________________________________________
conv2_block3_1_conv (Conv2D) (None, 56, 56, 64) 16384 conv2_block3_preact_relu[0][0]
__________________________________________________________________________________________________
conv2_block3_1_bn (BatchNormali (None, 56, 56, 64) 256 conv2_block3_1_conv[0][0]
__________________________________________________________________________________________________
conv2_block3_1_relu (Activation (None, 56, 56, 64) 0 conv2_block3_1_bn[0][0]
__________________________________________________________________________________________________
conv2_block3_2_pad (ZeroPadding (None, 58, 58, 64) 0 conv2_block3_1_relu[0][0]
__________________________________________________________________________________________________
conv2_block3_2_conv (Conv2D) (None, 28, 28, 64) 36864 conv2_block3_2_pad[0][0]
__________________________________________________________________________________________________
conv2_block3_2_bn (BatchNormali (None, 28, 28, 64) 256 conv2_block3_2_conv[0][0]
__________________________________________________________________________________________________
conv2_block3_2_relu (Activation (None, 28, 28, 64) 0 conv2_block3_2_bn[0][0]
__________________________________________________________________________________________________
max_pooling2d (MaxPooling2D) (None, 28, 28, 256) 0 conv2_block2_out[0][0]
__________________________________________________________________________________________________
conv2_block3_3_conv (Conv2D) (None, 28, 28, 256) 16640 conv2_block3_2_relu[0][0]
__________________________________________________________________________________________________
conv2_block3_out (Add) (None, 28, 28, 256) 0 max_pooling2d[0][0]
conv2_block3_3_conv[0][0]
__________________________________________________________________________________________________
conv3_block1_preact_bn (BatchNo (None, 28, 28, 256) 1024 conv2_block3_out[0][0]
__________________________________________________________________________________________________
conv3_block1_preact_relu (Activ (None, 28, 28, 256) 0 conv3_block1_preact_bn[0][0]
__________________________________________________________________________________________________
conv3_block1_1_conv (Conv2D) (None, 28, 28, 128) 32768 conv3_block1_preact_relu[0][0]
__________________________________________________________________________________________________
conv3_block1_1_bn (BatchNormali (None, 28, 28, 128) 512 conv3_block1_1_conv[0][0]
__________________________________________________________________________________________________
conv3_block1_1_relu (Activation (None, 28, 28, 128) 0 conv3_block1_1_bn[0][0]
__________________________________________________________________________________________________
conv3_block1_2_pad (ZeroPadding (None, 30, 30, 128) 0 conv3_block1_1_relu[0][0]
__________________________________________________________________________________________________
conv3_block1_2_conv (Conv2D) (None, 28, 28, 128) 147456 conv3_block1_2_pad[0][0]
__________________________________________________________________________________________________
conv3_block1_2_bn (BatchNormali (None, 28, 28, 128) 512 conv3_block1_2_conv[0][0]
__________________________________________________________________________________________________
conv3_block1_2_relu (Activation (None, 28, 28, 128) 0 conv3_block1_2_bn[0][0]
__________________________________________________________________________________________________
conv3_block1_0_conv (Conv2D) (None, 28, 28, 512) 131584 conv3_block1_preact_relu[0][0]
__________________________________________________________________________________________________
conv3_block1_3_conv (Conv2D) (None, 28, 28, 512) 66048 conv3_block1_2_relu[0][0]
__________________________________________________________________________________________________
conv3_block1_out (Add) (None, 28, 28, 512) 0 conv3_block1_0_conv[0][0]
conv3_block1_3_conv[0][0]
__________________________________________________________________________________________________
conv3_block2_preact_bn (BatchNo (None, 28, 28, 512) 2048 conv3_block1_out[0][0]
__________________________________________________________________________________________________
conv3_block2_preact_relu (Activ (None, 28, 28, 512) 0 conv3_block2_preact_bn[0][0]
__________________________________________________________________________________________________
conv3_block2_1_conv (Conv2D) (None, 28, 28, 128) 65536 conv3_block2_preact_relu[0][0]
__________________________________________________________________________________________________
conv3_block2_1_bn (BatchNormali (None, 28, 28, 128) 512 conv3_block2_1_conv[0][0]
__________________________________________________________________________________________________
conv3_block2_1_relu (Activation (None, 28, 28, 128) 0 conv3_block2_1_bn[0][0]
__________________________________________________________________________________________________
conv3_block2_2_pad (ZeroPadding (None, 30, 30, 128) 0 conv3_block2_1_relu[0][0]
__________________________________________________________________________________________________
conv3_block2_2_conv (Conv2D) (None, 28, 28, 128) 147456 conv3_block2_2_pad[0][0]
__________________________________________________________________________________________________
conv3_block2_2_bn (BatchNormali (None, 28, 28, 128) 512 conv3_block2_2_conv[0][0]
__________________________________________________________________________________________________
conv3_block2_2_relu (Activation (None, 28, 28, 128) 0 conv3_block2_2_bn[0][0]
__________________________________________________________________________________________________
conv3_block2_3_conv (Conv2D) (None, 28, 28, 512) 66048 conv3_block2_2_relu[0][0]
__________________________________________________________________________________________________
conv3_block2_out (Add) (None, 28, 28, 512) 0 conv3_block1_out[0][0]
conv3_block2_3_conv[0][0]
__________________________________________________________________________________________________
conv3_block3_preact_bn (BatchNo (None, 28, 28, 512) 2048 conv3_block2_out[0][0]
__________________________________________________________________________________________________
conv3_block3_preact_relu (Activ (None, 28, 28, 512) 0 conv3_block3_preact_bn[0][0]
__________________________________________________________________________________________________
conv3_block3_1_conv (Conv2D) (None, 28, 28, 128) 65536 conv3_block3_preact_relu[0][0]
__________________________________________________________________________________________________
conv3_block3_1_bn (BatchNormali (None, 28, 28, 128) 512 conv3_block3_1_conv[0][0]
__________________________________________________________________________________________________
conv3_block3_1_relu (Activation (None, 28, 28, 128) 0 conv3_block3_1_bn[0][0]
__________________________________________________________________________________________________
conv3_block3_2_pad (ZeroPadding (None, 30, 30, 128) 0 conv3_block3_1_relu[0][0]
__________________________________________________________________________________________________
conv3_block3_2_conv (Conv2D) (None, 28, 28, 128) 147456 conv3_block3_2_pad[0][0]
__________________________________________________________________________________________________
conv3_block3_2_bn (BatchNormali (None, 28, 28, 128) 512 conv3_block3_2_conv[0][0]
__________________________________________________________________________________________________
conv3_block3_2_relu (Activation (None, 28, 28, 128) 0 conv3_block3_2_bn[0][0]
__________________________________________________________________________________________________
conv3_block3_3_conv (Conv2D) (None, 28, 28, 512) 66048 conv3_block3_2_relu[0][0]
__________________________________________________________________________________________________
conv3_block3_out (Add) (None, 28, 28, 512) 0 conv3_block2_out[0][0]
conv3_block3_3_conv[0][0]
__________________________________________________________________________________________________
conv3_block4_preact_bn (BatchNo (None, 28, 28, 512) 2048 conv3_block3_out[0][0]
__________________________________________________________________________________________________
conv3_block4_preact_relu (Activ (None, 28, 28, 512) 0 conv3_block4_preact_bn[0][0]
__________________________________________________________________________________________________
conv3_block4_1_conv (Conv2D) (None, 28, 28, 128) 65536 conv3_block4_preact_relu[0][0]
__________________________________________________________________________________________________
conv3_block4_1_bn (BatchNormali (None, 28, 28, 128) 512 conv3_block4_1_conv[0][0]
__________________________________________________________________________________________________
conv3_block4_1_relu (Activation (None, 28, 28, 128) 0 conv3_block4_1_bn[0][0]
__________________________________________________________________________________________________
conv3_block4_2_pad (ZeroPadding (None, 30, 30, 128) 0 conv3_block4_1_relu[0][0]
__________________________________________________________________________________________________
conv3_block4_2_conv (Conv2D) (None, 14, 14, 128) 147456 conv3_block4_2_pad[0][0]
__________________________________________________________________________________________________
conv3_block4_2_bn (BatchNormali (None, 14, 14, 128) 512 conv3_block4_2_conv[0][0]
__________________________________________________________________________________________________
conv3_block4_2_relu (Activation (None, 14, 14, 128) 0 conv3_block4_2_bn[0][0]
__________________________________________________________________________________________________
max_pooling2d_1 (MaxPooling2D) (None, 14, 14, 512) 0 conv3_block3_out[0][0]
__________________________________________________________________________________________________
conv3_block4_3_conv (Conv2D) (None, 14, 14, 512) 66048 conv3_block4_2_relu[0][0]
__________________________________________________________________________________________________
conv3_block4_out (Add) (None, 14, 14, 512) 0 max_pooling2d_1[0][0]
conv3_block4_3_conv[0][0]
__________________________________________________________________________________________________
conv4_block1_preact_bn (BatchNo (None, 14, 14, 512) 2048 conv3_block4_out[0][0]
__________________________________________________________________________________________________
conv4_block1_preact_relu (Activ (None, 14, 14, 512) 0 conv4_block1_preact_bn[0][0]
__________________________________________________________________________________________________
conv4_block1_1_conv (Conv2D) (None, 14, 14, 256) 131072 conv4_block1_preact_relu[0][0]
__________________________________________________________________________________________________
conv4_block1_1_bn (BatchNormali (None, 14, 14, 256) 1024 conv4_block1_1_conv[0][0]
__________________________________________________________________________________________________
conv4_block1_1_relu (Activation (None, 14, 14, 256) 0 conv4_block1_1_bn[0][0]
__________________________________________________________________________________________________
conv4_block1_2_pad (ZeroPadding (None, 16, 16, 256) 0 conv4_block1_1_relu[0][0]
__________________________________________________________________________________________________
conv4_block1_2_conv (Conv2D) (None, 14, 14, 256) 589824 conv4_block1_2_pad[0][0]
__________________________________________________________________________________________________
conv4_block1_2_bn (BatchNormali (None, 14, 14, 256) 1024 conv4_block1_2_conv[0][0]
__________________________________________________________________________________________________
conv4_block1_2_relu (Activation (None, 14, 14, 256) 0 conv4_block1_2_bn[0][0]
__________________________________________________________________________________________________
conv4_block1_0_conv (Conv2D) (None, 14, 14, 1024) 525312 conv4_block1_preact_relu[0][0]
__________________________________________________________________________________________________
conv4_block1_3_conv (Conv2D) (None, 14, 14, 1024) 263168 conv4_block1_2_relu[0][0]
__________________________________________________________________________________________________
conv4_block1_out (Add) (None, 14, 14, 1024) 0 conv4_block1_0_conv[0][0]
conv4_block1_3_conv[0][0]
__________________________________________________________________________________________________
conv4_block2_preact_bn (BatchNo (None, 14, 14, 1024) 4096 conv4_block1_out[0][0]
__________________________________________________________________________________________________
conv4_block2_preact_relu (Activ (None, 14, 14, 1024) 0 conv4_block2_preact_bn[0][0]
__________________________________________________________________________________________________
conv4_block2_1_conv (Conv2D) (None, 14, 14, 256) 262144 conv4_block2_preact_relu[0][0]
__________________________________________________________________________________________________
conv4_block2_1_bn (BatchNormali (None, 14, 14, 256) 1024 conv4_block2_1_conv[0][0]
__________________________________________________________________________________________________
conv4_block2_1_relu (Activation (None, 14, 14, 256) 0 conv4_block2_1_bn[0][0]
__________________________________________________________________________________________________
conv4_block2_2_pad (ZeroPadding (None, 16, 16, 256) 0 conv4_block2_1_relu[0][0]
__________________________________________________________________________________________________
conv4_block2_2_conv (Conv2D) (None, 14, 14, 256) 589824 conv4_block2_2_pad[0][0]
__________________________________________________________________________________________________
conv4_block2_2_bn (BatchNormali (None, 14, 14, 256) 1024 conv4_block2_2_conv[0][0]
__________________________________________________________________________________________________
conv4_block2_2_relu (Activation (None, 14, 14, 256) 0 conv4_block2_2_bn[0][0]
__________________________________________________________________________________________________
conv4_block2_3_conv (Conv2D) (None, 14, 14, 1024) 263168 conv4_block2_2_relu[0][0]
__________________________________________________________________________________________________
conv4_block2_out (Add) (None, 14, 14, 1024) 0 conv4_block1_out[0][0]
conv4_block2_3_conv[0][0]
__________________________________________________________________________________________________
conv4_block3_preact_bn (BatchNo (None, 14, 14, 1024) 4096 conv4_block2_out[0][0]
__________________________________________________________________________________________________
conv4_block3_preact_relu (Activ (None, 14, 14, 1024) 0 conv4_block3_preact_bn[0][0]
__________________________________________________________________________________________________
conv4_block3_1_conv (Conv2D) (None, 14, 14, 256) 262144 conv4_block3_preact_relu[0][0]
__________________________________________________________________________________________________
conv4_block3_1_bn (BatchNormali (None, 14, 14, 256) 1024 conv4_block3_1_conv[0][0]
__________________________________________________________________________________________________
conv4_block3_1_relu (Activation (None, 14, 14, 256) 0 conv4_block3_1_bn[0][0]
__________________________________________________________________________________________________
conv4_block3_2_pad (ZeroPadding (None, 16, 16, 256) 0 conv4_block3_1_relu[0][0]
__________________________________________________________________________________________________
conv4_block3_2_conv (Conv2D) (None, 14, 14, 256) 589824 conv4_block3_2_pad[0][0]
__________________________________________________________________________________________________
conv4_block3_2_bn (BatchNormali (None, 14, 14, 256) 1024 conv4_block3_2_conv[0][0]
__________________________________________________________________________________________________
conv4_block3_2_relu (Activation (None, 14, 14, 256) 0 conv4_block3_2_bn[0][0]
__________________________________________________________________________________________________
conv4_block3_3_conv (Conv2D) (None, 14, 14, 1024) 263168 conv4_block3_2_relu[0][0]
__________________________________________________________________________________________________
conv4_block3_out (Add) (None, 14, 14, 1024) 0 conv4_block2_out[0][0]
conv4_block3_3_conv[0][0]
__________________________________________________________________________________________________
conv4_block4_preact_bn (BatchNo (None, 14, 14, 1024) 4096 conv4_block3_out[0][0]
__________________________________________________________________________________________________
conv4_block4_preact_relu (Activ (None, 14, 14, 1024) 0 conv4_block4_preact_bn[0][0]
__________________________________________________________________________________________________
conv4_block4_1_conv (Conv2D) (None, 14, 14, 256) 262144 conv4_block4_preact_relu[0][0]
__________________________________________________________________________________________________
conv4_block4_1_bn (BatchNormali (None, 14, 14, 256) 1024 conv4_block4_1_conv[0][0]
__________________________________________________________________________________________________
conv4_block4_1_relu (Activation (None, 14, 14, 256) 0 conv4_block4_1_bn[0][0]
__________________________________________________________________________________________________
conv4_block4_2_pad (ZeroPadding (None, 16, 16, 256) 0 conv4_block4_1_relu[0][0]
__________________________________________________________________________________________________
conv4_block4_2_conv (Conv2D) (None, 14, 14, 256) 589824 conv4_block4_2_pad[0][0]
__________________________________________________________________________________________________
conv4_block4_2_bn (BatchNormali (None, 14, 14, 256) 1024 conv4_block4_2_conv[0][0]
__________________________________________________________________________________________________
conv4_block4_2_relu (Activation (None, 14, 14, 256) 0 conv4_block4_2_bn[0][0]
__________________________________________________________________________________________________
conv4_block4_3_conv (Conv2D) (None, 14, 14, 1024) 263168 conv4_block4_2_relu[0][0]
__________________________________________________________________________________________________
conv4_block4_out (Add) (None, 14, 14, 1024) 0 conv4_block3_out[0][0]
conv4_block4_3_conv[0][0]
__________________________________________________________________________________________________
conv4_block5_preact_bn (BatchNo (None, 14, 14, 1024) 4096 conv4_block4_out[0][0]
__________________________________________________________________________________________________
conv4_block5_preact_relu (Activ (None, 14, 14, 1024) 0 conv4_block5_preact_bn[0][0]
__________________________________________________________________________________________________
conv4_block5_1_conv (Conv2D) (None, 14, 14, 256) 262144 conv4_block5_preact_relu[0][0]
__________________________________________________________________________________________________
conv4_block5_1_bn (BatchNormali (None, 14, 14, 256) 1024 conv4_block5_1_conv[0][0]
__________________________________________________________________________________________________
conv4_block5_1_relu (Activation (None, 14, 14, 256) 0 conv4_block5_1_bn[0][0]
__________________________________________________________________________________________________
conv4_block5_2_pad (ZeroPadding (None, 16, 16, 256) 0 conv4_block5_1_relu[0][0]
__________________________________________________________________________________________________
conv4_block5_2_conv (Conv2D) (None, 14, 14, 256) 589824 conv4_block5_2_pad[0][0]
__________________________________________________________________________________________________
conv4_block5_2_bn (BatchNormali (None, 14, 14, 256) 1024 conv4_block5_2_conv[0][0]
__________________________________________________________________________________________________
conv4_block5_2_relu (Activation (None, 14, 14, 256) 0 conv4_block5_2_bn[0][0]
__________________________________________________________________________________________________
conv4_block5_3_conv (Conv2D) (None, 14, 14, 1024) 263168 conv4_block5_2_relu[0][0]
__________________________________________________________________________________________________
conv4_block5_out (Add) (None, 14, 14, 1024) 0 conv4_block4_out[0][0]
conv4_block5_3_conv[0][0]
__________________________________________________________________________________________________
conv4_block6_preact_bn (BatchNo (None, 14, 14, 1024) 4096 conv4_block5_out[0][0]
__________________________________________________________________________________________________
conv4_block6_preact_relu (Activ (None, 14, 14, 1024) 0 conv4_block6_preact_bn[0][0]
__________________________________________________________________________________________________
conv4_block6_1_conv (Conv2D) (None, 14, 14, 256) 262144 conv4_block6_preact_relu[0][0]
__________________________________________________________________________________________________
conv4_block6_1_bn (BatchNormali (None, 14, 14, 256) 1024 conv4_block6_1_conv[0][0]
__________________________________________________________________________________________________
conv4_block6_1_relu (Activation (None, 14, 14, 256) 0 conv4_block6_1_bn[0][0]
__________________________________________________________________________________________________
conv4_block6_2_pad (ZeroPadding (None, 16, 16, 256) 0 conv4_block6_1_relu[0][0]
__________________________________________________________________________________________________
conv4_block6_2_conv (Conv2D) (None, 7, 7, 256) 589824 conv4_block6_2_pad[0][0]
__________________________________________________________________________________________________
conv4_block6_2_bn (BatchNormali (None, 7, 7, 256) 1024 conv4_block6_2_conv[0][0]
__________________________________________________________________________________________________
conv4_block6_2_relu (Activation (None, 7, 7, 256) 0 conv4_block6_2_bn[0][0]
__________________________________________________________________________________________________
max_pooling2d_2 (MaxPooling2D) (None, 7, 7, 1024) 0 conv4_block5_out[0][0]
__________________________________________________________________________________________________
conv4_block6_3_conv (Conv2D) (None, 7, 7, 1024) 263168 conv4_block6_2_relu[0][0]
__________________________________________________________________________________________________
conv4_block6_out (Add) (None, 7, 7, 1024) 0 max_pooling2d_2[0][0]
conv4_block6_3_conv[0][0]
__________________________________________________________________________________________________
conv5_block1_preact_bn (BatchNo (None, 7, 7, 1024) 4096 conv4_block6_out[0][0]
__________________________________________________________________________________________________
conv5_block1_preact_relu (Activ (None, 7, 7, 1024) 0 conv5_block1_preact_bn[0][0]
__________________________________________________________________________________________________
conv5_block1_1_conv (Conv2D) (None, 7, 7, 512) 524288 conv5_block1_preact_relu[0][0]
__________________________________________________________________________________________________
conv5_block1_1_bn (BatchNormali (None, 7, 7, 512) 2048 conv5_block1_1_conv[0][0]
__________________________________________________________________________________________________
conv5_block1_1_relu (Activation (None, 7, 7, 512) 0 conv5_block1_1_bn[0][0]
__________________________________________________________________________________________________
conv5_block1_2_pad (ZeroPadding (None, 9, 9, 512) 0 conv5_block1_1_relu[0][0]
__________________________________________________________________________________________________
conv5_block1_2_conv (Conv2D) (None, 7, 7, 512) 2359296 conv5_block1_2_pad[0][0]
__________________________________________________________________________________________________
conv5_block1_2_bn (BatchNormali (None, 7, 7, 512) 2048 conv5_block1_2_conv[0][0]
__________________________________________________________________________________________________
conv5_block1_2_relu (Activation (None, 7, 7, 512) 0 conv5_block1_2_bn[0][0]
__________________________________________________________________________________________________
conv5_block1_0_conv (Conv2D) (None, 7, 7, 2048) 2099200 conv5_block1_preact_relu[0][0]
__________________________________________________________________________________________________
conv5_block1_3_conv (Conv2D) (None, 7, 7, 2048) 1050624 conv5_block1_2_relu[0][0]
__________________________________________________________________________________________________
conv5_block1_out (Add) (None, 7, 7, 2048) 0 conv5_block1_0_conv[0][0]
conv5_block1_3_conv[0][0]
__________________________________________________________________________________________________
conv5_block2_preact_bn (BatchNo (None, 7, 7, 2048) 8192 conv5_block1_out[0][0]
__________________________________________________________________________________________________
conv5_block2_preact_relu (Activ (None, 7, 7, 2048) 0 conv5_block2_preact_bn[0][0]
__________________________________________________________________________________________________
conv5_block2_1_conv (Conv2D) (None, 7, 7, 512) 1048576 conv5_block2_preact_relu[0][0]
__________________________________________________________________________________________________
conv5_block2_1_bn (BatchNormali (None, 7, 7, 512) 2048 conv5_block2_1_conv[0][0]
__________________________________________________________________________________________________
conv5_block2_1_relu (Activation (None, 7, 7, 512) 0 conv5_block2_1_bn[0][0]
__________________________________________________________________________________________________
conv5_block2_2_pad (ZeroPadding (None, 9, 9, 512) 0 conv5_block2_1_relu[0][0]
__________________________________________________________________________________________________
conv5_block2_2_conv (Conv2D) (None, 7, 7, 512) 2359296 conv5_block2_2_pad[0][0]
__________________________________________________________________________________________________
conv5_block2_2_bn (BatchNormali (None, 7, 7, 512) 2048 conv5_block2_2_conv[0][0]
__________________________________________________________________________________________________
conv5_block2_2_relu (Activation (None, 7, 7, 512) 0 conv5_block2_2_bn[0][0]
__________________________________________________________________________________________________
conv5_block2_3_conv (Conv2D) (None, 7, 7, 2048) 1050624 conv5_block2_2_relu[0][0]
__________________________________________________________________________________________________
conv5_block2_out (Add) (None, 7, 7, 2048) 0 conv5_block1_out[0][0]
conv5_block2_3_conv[0][0]
__________________________________________________________________________________________________
conv5_block3_preact_bn (BatchNo (None, 7, 7, 2048) 8192 conv5_block2_out[0][0]
__________________________________________________________________________________________________
conv5_block3_preact_relu (Activ (None, 7, 7, 2048) 0 conv5_block3_preact_bn[0][0]
__________________________________________________________________________________________________
conv5_block3_1_conv (Conv2D) (None, 7, 7, 512) 1048576 conv5_block3_preact_relu[0][0]
__________________________________________________________________________________________________
conv5_block3_1_bn (BatchNormali (None, 7, 7, 512) 2048 conv5_block3_1_conv[0][0]
__________________________________________________________________________________________________
conv5_block3_1_relu (Activation (None, 7, 7, 512) 0 conv5_block3_1_bn[0][0]
__________________________________________________________________________________________________
conv5_block3_2_pad (ZeroPadding (None, 9, 9, 512) 0 conv5_block3_1_relu[0][0]
__________________________________________________________________________________________________
conv5_block3_2_conv (Conv2D) (None, 7, 7, 512) 2359296 conv5_block3_2_pad[0][0]
__________________________________________________________________________________________________
conv5_block3_2_bn (BatchNormali (None, 7, 7, 512) 2048 conv5_block3_2_conv[0][0]
__________________________________________________________________________________________________
conv5_block3_2_relu (Activation (None, 7, 7, 512) 0 conv5_block3_2_bn[0][0]
__________________________________________________________________________________________________
conv5_block3_3_conv (Conv2D) (None, 7, 7, 2048) 1050624 conv5_block3_2_relu[0][0]
__________________________________________________________________________________________________
conv5_block3_out (Add) (None, 7, 7, 2048) 0 conv5_block2_out[0][0]
conv5_block3_3_conv[0][0]
__________________________________________________________________________________________________
post_bn (BatchNormalization) (None, 7, 7, 2048) 8192 conv5_block3_out[0][0]
__________________________________________________________________________________________________
post_relu (Activation) (None, 7, 7, 2048) 0 post_bn[0][0]
==================================================================================================
Total params: 23,564,800
Trainable params: 0
Non-trainable params: 23,564,800
__________________________________________________________________________________________________
# Attach a lightweight classification head on top of the frozen ResNet50V2 trunk.
inputs = tf.keras.Input(shape=IMG_SHAPE)
# training=False keeps the (frozen) BatchNorm layers in inference mode.
features = base_model(inputs, training=False)
pooled = tf.keras.layers.GlobalAvgPool2D()(features)
dropped = tf.keras.layers.Dropout(0.2)(pooled)
outputs = tf.keras.layers.Dense(
    num_training_classes,
    activation=tf.keras.activations.softmax,
    kernel_regularizer=tf.keras.regularizers.l2(0.001),
)(dropped)
model = tf.keras.models.Model(inputs, outputs)
model.summary()
Model: "model" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= input_2 (InputLayer) [(None, 224, 224, 3)] 0 _________________________________________________________________ resnet50v2 (Functional) (None, 7, 7, 2048) 23564800 _________________________________________________________________ global_average_pooling2d (Gl (None, 2048) 0 _________________________________________________________________ dropout (Dropout) (None, 2048) 0 _________________________________________________________________ dense (Dense) (None, 129) 264321 ================================================================= Total params: 23,829,121 Trainable params: 264,321 Non-trainable params: 23,564,800 _________________________________________________________________
# Define the optimizer, loss and metrics.
LEARNING_RATE = 0.01
# FIX: `lr` is a deprecated alias in tf.keras; use the canonical `learning_rate`.
# `decay` (time-based LR decay) is kept because the PrintSGDLR callback reads
# `optimizer.decay` to reconstruct the effective learning rate.
model.compile(
    optimizer=tf.keras.optimizers.SGD(learning_rate=LEARNING_RATE,
                                      momentum=0.9,
                                      decay=1e-3),
    loss=tf.keras.losses.SparseCategoricalCrossentropy(),
    metrics=['accuracy'],
)
# EarlyStopping guards against overfitting: stop once val_loss has failed to
# improve by at least min_delta for `patience` epochs, then roll the model
# back to the best weights seen so far.
early_stopping = tf.keras.callbacks.EarlyStopping(
    monitor='val_loss',
    min_delta=0.01,
    patience=10,
    restore_best_weights=True,
)
# ReduceLROnPlateau halves the learning rate whenever the monitored metric
# (default: val_loss) plateaus for 2 consecutive epochs.
lr_scheduler = tf.keras.callbacks.ReduceLROnPlateau(factor=0.5, patience=2)
class PrintSGDLR(tf.keras.callbacks.Callback):
    """Print the effective SGD learning rate (after time-based decay) each epoch."""

    def on_epoch_end(self, epoch, logs=None):
        opt = self.model.optimizer
        K = tf.keras.backend
        # Reproduce SGD's time-based decay schedule: lr / (1 + decay * iterations).
        steps = K.cast(opt.iterations, K.dtype(opt.decay))
        effective_lr = opt.lr / (1. + opt.decay * steps)
        print("Learning Rate = ", K.eval(effective_lr))
# Timestamped checkpoint file name so successive runs never overwrite each other.
model_name = time.strftime("%d_%m_%Y-%H_%M_%S")
# Keep only the weights with the best validation loss.
# NOTE(review): this callback is never passed to model.fit's callbacks list
# below — confirm whether checkpointing was intended to be active.
model_checkpoint = tf.keras.callbacks.ModelCheckpoint(
    'checkpoints/' + model_name + '.h5',
    monitor='val_loss',
    save_best_only=True,
    verbose=1,
)
# Compute inverse-frequency ("balanced") class weights so under-represented
# classes contribute proportionally more to the training loss.
from sklearn.utils import class_weight

labels = train_generator.classes
balanced = class_weight.compute_class_weight(
    'balanced',
    classes=np.unique(labels),
    y=labels,
)
# Keras expects a {class_index: weight} mapping.
class_weights = {idx: weight for idx, weight in enumerate(balanced)}
class_weights
{0: 1.2600551970453346,
1: 1.0816653601602648,
2: 0.8130761575529017,
3: 0.8214011694049792,
4: 1.2003518335943089,
5: 1.5527131782945736,
6: 1.0007091169881868,
7: 1.8656631212066583,
8: 2.3141398330351817,
9: 0.8504259457090421,
10: 2.5878552971576227,
11: 1.0007091169881868,
12: 1.1711461928742526,
13: 0.7995699090885678,
14: 0.8158323479174878,
15: 0.8200018488438123,
16: 1.4283118257309135,
17: 1.7129575988303125,
18: 1.129908650871638,
19: 0.8270465382668691,
20: 0.7509221299084522,
21: 2.506984819121447,
22: 1.756719289311379,
23: 0.7801314185920872,
24: 0.7701457364341086,
25: 1.7314427527745246,
26: 0.8049182027948458,
27: 0.795605099622013,
28: 0.8049182027948458,
29: 0.7738602657095142,
30: 0.7995699090885678,
31: 0.7982439225063314,
32: 1.9025339338787266,
33: 0.7903794503634118,
34: 0.8214011694049792,
35: 1.2185850260033362,
36: 0.9328315606033292,
37: 2.207986629684944,
38: 1.0197904348968598,
39: 0.8327700437220031,
40: 0.802235142118863,
41: 2.5467782289487717,
42: 0.8284700262845401,
43: 0.7788690700183136,
44: 2.083727641867177,
45: 0.7628226390987605,
46: 0.8415053938309752,
47: 0.810338527392791,
48: 0.7801314185920872,
49: 1.8876120991032073,
50: 0.8186072878763909,
51: 0.7890837463464226,
52: 0.9238792423633739,
53: 0.8089766139013745,
54: 0.8049182027948458,
55: 0.8009003082717435,
56: 0.8130761575529017,
57: 0.8158323479174878,
58: 0.810338527392791,
59: 0.7852219988112852,
60: 2.2812373709541127,
61: 0.8009003082717435,
62: 0.7839431356210388,
63: 1.1406186854770564,
64: 0.786505041293003,
65: 0.8035744328402635,
66: 0.7890837463464226,
67: 0.9116308433168898,
68: 0.7982439225063314,
69: 1.072029143143247,
70: 0.795605099622013,
71: 1.4630428123748263,
72: 0.8474314881537286,
73: 0.8270465382668691,
74: 1.648428374216842,
75: 1.9727093658660566,
76: 0.8371149309066397,
77: 0.9221093587573138,
78: 1.3223656188772468,
79: 1.2003518335943089,
80: 1.1914383298794995,
81: 0.8400367980302231,
82: 0.8200018488438123,
83: 2.074746057203956,
84: 2.382876659758999,
85: 0.8172174622603019,
86: 2.2284309503301754,
87: 1.6597968457631649,
88: 0.7995699090885678,
89: 0.7801314185920872,
90: 0.9843375976918565,
91: 2.5201103940906693,
92: 2.148844130675526,
93: 0.7929836660153506,
94: 0.7751064175061478,
95: 0.7839431356210388,
96: 2.057013184920162,
97: 0.873577287243771,
98: 0.7916794165646674,
99: 0.7982439225063314,
100: 0.8327700437220031,
101: 0.8009003082717435,
102: 2.057013184920162,
103: 1.9025339338787266,
104: 0.8130761575529017,
105: 0.8980244128196229,
106: 0.7839431356210388,
107: 1.8513118664281456,
108: 1.0396135751000386,
109: 0.8049182027948458,
110: 0.9438060495516036,
111: 1.6316646958349756,
112: 0.7713799443450606,
113: 2.506984819121447,
114: 0.8117050341843471,
115: 1.0509630682779865,
116: 0.7929836660153506,
117: 0.7788690700183136,
118: 0.8062664744913196,
119: 0.7763565891472868,
120: 0.8214011694049792,
121: 0.8144519209328559,
122: 0.8062664744913196,
123: 0.810338527392791,
124: 0.8342133193610362,
125: 1.5885844398393327,
126: 1.756719289311379,
127: 1.7376934486329163,
128: 1.706883281103964}
EPOCHS = 20
# Phase one: train only the new classification head (the ResNet trunk is
# frozen). Class weights counter the dataset's class imbalance.
phase_one_callbacks = [early_stopping, lr_scheduler, PrintSGDLR()]
history = model.fit(
    train_generator,
    epochs=EPOCHS,
    validation_data=val_generator,
    callbacks=phase_one_callbacks,
    class_weight=class_weights,
)
Epoch 1/20 243/243 [==============================] - 683s 3s/step - loss: 5.1746 - accuracy: 0.0309 - val_loss: 4.2803 - val_accuracy: 0.1129 Learning Rate = 0.008045052 Epoch 2/20 243/243 [==============================] - 647s 3s/step - loss: 4.4595 - accuracy: 0.0851 - val_loss: 4.0165 - val_accuracy: 0.1581 Learning Rate = 0.0067294748 Epoch 3/20 243/243 [==============================] - 675s 3s/step - loss: 4.2499 - accuracy: 0.1094 - val_loss: 3.9153 - val_accuracy: 0.1791 Learning Rate = 0.0057836897 Epoch 4/20 243/243 [==============================] - 649s 3s/step - loss: 4.1437 - accuracy: 0.1242 - val_loss: 3.8310 - val_accuracy: 0.1929 Learning Rate = 0.0050709937 Epoch 5/20 243/243 [==============================] - 661s 3s/step - loss: 4.0591 - accuracy: 0.1351 - val_loss: 3.7950 - val_accuracy: 0.2041 Learning Rate = 0.0045146723 Epoch 6/20 243/243 [==============================] - 667s 3s/step - loss: 4.0238 - accuracy: 0.1441 - val_loss: 3.7573 - val_accuracy: 0.2126 Learning Rate = 0.0040683476 Epoch 7/20 243/243 [==============================] - 668s 3s/step - loss: 3.9670 - accuracy: 0.1480 - val_loss: 3.7213 - val_accuracy: 0.2120 Learning Rate = 0.003702332 Epoch 8/20 243/243 [==============================] - 677s 3s/step - loss: 3.9368 - accuracy: 0.1530 - val_loss: 3.6968 - val_accuracy: 0.2257 Learning Rate = 0.0033967388 Epoch 9/20 243/243 [==============================] - 654s 3s/step - loss: 3.9130 - accuracy: 0.1598 - val_loss: 3.6706 - val_accuracy: 0.2307 Learning Rate = 0.003137747 Epoch 10/20 243/243 [==============================] - 635s 3s/step - loss: 3.8812 - accuracy: 0.1625 - val_loss: 3.6682 - val_accuracy: 0.2254 Learning Rate = 0.0029154518 Epoch 11/20 243/243 [==============================] - 659s 3s/step - loss: 3.8622 - accuracy: 0.1636 - val_loss: 3.6431 - val_accuracy: 0.2360 Learning Rate = 0.00272257 Epoch 12/20 243/243 [==============================] - 670s 3s/step - loss: 3.8388 - accuracy: 0.1689 - 
val_loss: 3.6275 - val_accuracy: 0.2405 Learning Rate = 0.002553626 Epoch 13/20 243/243 [==============================] - 659s 3s/step - loss: 3.8466 - accuracy: 0.1703 - val_loss: 3.6190 - val_accuracy: 0.2430 Learning Rate = 0.002404424 Epoch 14/20 243/243 [==============================] - 666s 3s/step - loss: 3.8205 - accuracy: 0.1727 - val_loss: 3.6075 - val_accuracy: 0.2403 Learning Rate = 0.0022716944 Epoch 15/20 243/243 [==============================] - 699s 3s/step - loss: 3.8095 - accuracy: 0.1740 - val_loss: 3.5945 - val_accuracy: 0.2449 Learning Rate = 0.0021528522 Epoch 16/20 243/243 [==============================] - 680s 3s/step - loss: 3.8001 - accuracy: 0.1776 - val_loss: 3.5826 - val_accuracy: 0.2520 Learning Rate = 0.0020458263 Epoch 17/20 243/243 [==============================] - 679s 3s/step - loss: 3.7903 - accuracy: 0.1812 - val_loss: 3.5888 - val_accuracy: 0.2452 Learning Rate = 0.0019489378 Epoch 18/20 243/243 [==============================] - 663s 3s/step - loss: 3.7774 - accuracy: 0.1806 - val_loss: 3.5880 - val_accuracy: 0.2428 Learning Rate = 0.00093040563 Epoch 19/20 243/243 [==============================] - 731s 3s/step - loss: 3.7501 - accuracy: 0.1837 - val_loss: 3.5743 - val_accuracy: 0.2491 Learning Rate = 0.00089015486 Epoch 20/20 243/243 [==============================] - 663s 3s/step - loss: 3.7672 - accuracy: 0.1833 - val_loss: 3.5680 - val_accuracy: 0.2562 Learning Rate = 0.0008532423
# Plot the loss/accuracy curves recorded during the head-training phase.
pd.DataFrame(history.history).plot()
<AxesSubplot:>
# Evaluate the frozen-trunk model on the held-out test set.
model.evaluate(test_generator)
20698/20698 [==============================] - 398s 19ms/step - loss: 3.5672 - accuracy: 0.2559
[3.5671935081481934, 0.25587013363838196]
# Unfreeze the whole trunk, then re-freeze the earliest layers so only the
# deeper, more task-specific layers are fine-tuned.
base_model.trainable = True
print("Number of layers in the base model: ", len(base_model.layers))
# Layers with index >= fine_tune_at are trained; earlier layers keep their
# generic ImageNet features.
fine_tune_at = 80
for frozen_layer in base_model.layers[:fine_tune_at]:
    frozen_layer.trainable = False
Number of layers in the base model: 190
# Re-inspect the trainable/non-trainable parameter split after unfreezing.
model.summary()
Model: "model" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= input_2 (InputLayer) [(None, 224, 224, 3)] 0 _________________________________________________________________ resnet50v2 (Functional) (None, 7, 7, 2048) 23564800 _________________________________________________________________ global_average_pooling2d (Gl (None, 2048) 0 _________________________________________________________________ dropout (Dropout) (None, 2048) 0 _________________________________________________________________ dense (Dense) (None, 129) 264321 ================================================================= Total params: 23,829,121 Trainable params: 22,551,425 Non-trainable params: 1,277,696 _________________________________________________________________
# Recompile for fine-tuning: a 5x smaller learning rate and higher momentum
# protect the pretrained weights from large destructive updates. Recompiling
# is also required for the new `trainable` flags to take effect.
# FIX: `lr` is a deprecated alias in tf.keras; use the canonical `learning_rate`.
model.compile(
    optimizer=tf.keras.optimizers.SGD(learning_rate=LEARNING_RATE / 5,
                                      momentum=0.99,
                                      decay=1e-4),
    loss=tf.keras.losses.SparseCategoricalCrossentropy(),
    metrics=['accuracy'],
)
fine_tune_epochs = 200
# BUG FIX: `early_stopping.stopped_epoch` is 0 when the first run was NOT
# stopped early — which is what happened here (all 20 epochs ran) — so the
# old `stopped_epoch + fine_tune_epochs` budget was wrong. Count the epochs
# phase one actually ran instead.
epochs_already_run = len(history.epoch)
total_epochs = epochs_already_run + fine_tune_epochs
# NOTE(review): phase one passed `class_weight=class_weights` but this fit
# does not — confirm whether that asymmetry is intentional.
history_fine = model.fit(
    train_generator,
    epochs=total_epochs,
    # Resume epoch numbering where phase one left off.
    initial_epoch=epochs_already_run,
    callbacks=[early_stopping, lr_scheduler, PrintSGDLR()],
    validation_data=val_generator,
)
Epoch 1/200 243/243 [==============================] - 710s 3s/step - loss: 3.6763 - accuracy: 0.1931 - val_loss: 2.5324 - val_accuracy: 0.4012 Learning Rate = 0.001952553 Epoch 2/200 243/243 [==============================] - 674s 3s/step - loss: 2.7910 - accuracy: 0.3454 - val_loss: 1.9835 - val_accuracy: 0.5176 Learning Rate = 0.0019073051 Epoch 3/200 243/243 [==============================] - 658s 3s/step - loss: 2.2663 - accuracy: 0.4589 - val_loss: 1.6345 - val_accuracy: 0.5990 Learning Rate = 0.0018641066 Epoch 4/200 243/243 [==============================] - 665s 3s/step - loss: 1.9040 - accuracy: 0.5383 - val_loss: 1.4199 - val_accuracy: 0.6587 Learning Rate = 0.0018228218 Epoch 5/200 243/243 [==============================] - 694s 3s/step - loss: 1.6878 - accuracy: 0.5884 - val_loss: 1.2590 - val_accuracy: 0.7018 Learning Rate = 0.0017833259 Epoch 6/200 243/243 [==============================] - 688s 3s/step - loss: 1.5038 - accuracy: 0.6375 - val_loss: 1.1262 - val_accuracy: 0.7301 Learning Rate = 0.0017455054 Epoch 7/200 243/243 [==============================] - 692s 3s/step - loss: 1.3831 - accuracy: 0.6647 - val_loss: 1.0674 - val_accuracy: 0.7453 Learning Rate = 0.0017092557 Epoch 8/200 243/243 [==============================] - 683s 3s/step - loss: 1.2344 - accuracy: 0.6987 - val_loss: 1.0266 - val_accuracy: 0.7517 Learning Rate = 0.0016744811 Epoch 9/200 243/243 [==============================] - 701s 3s/step - loss: 1.1732 - accuracy: 0.7134 - val_loss: 0.9817 - val_accuracy: 0.7663 Learning Rate = 0.0016410932 Epoch 10/200 243/243 [==============================] - 693s 3s/step - loss: 1.0919 - accuracy: 0.7305 - val_loss: 0.8745 - val_accuracy: 0.7922 Learning Rate = 0.0016090105 Epoch 11/200 243/243 [==============================] - 682s 3s/step - loss: 1.0240 - accuracy: 0.7463 - val_loss: 0.8734 - val_accuracy: 0.7944 Learning Rate = 0.0015781583 Epoch 12/200 243/243 [==============================] - 657s 3s/step - loss: 0.9852 - accuracy: 
0.7566 - val_loss: 0.8683 - val_accuracy: 0.7942 Learning Rate = 0.0015484671 Epoch 13/200 243/243 [==============================] - 626s 3s/step - loss: 0.9432 - accuracy: 0.7668 - val_loss: 0.8067 - val_accuracy: 0.8075 Learning Rate = 0.0015198725 Epoch 14/200 243/243 [==============================] - 625s 3s/step - loss: 0.9014 - accuracy: 0.7771 - val_loss: 0.7753 - val_accuracy: 0.8196 Learning Rate = 0.0014923147 Epoch 15/200 243/243 [==============================] - 624s 3s/step - loss: 0.8454 - accuracy: 0.7919 - val_loss: 0.7456 - val_accuracy: 0.8290 Learning Rate = 0.0014657384 Epoch 16/200 243/243 [==============================] - 623s 3s/step - loss: 0.8022 - accuracy: 0.7994 - val_loss: 0.7073 - val_accuracy: 0.8332 Learning Rate = 0.0014400922 Epoch 17/200 243/243 [==============================] - 633s 3s/step - loss: 0.7785 - accuracy: 0.8056 - val_loss: 0.6999 - val_accuracy: 0.8342 Learning Rate = 0.0014153281 Epoch 18/200 243/243 [==============================] - 676s 3s/step - loss: 0.7474 - accuracy: 0.8131 - val_loss: 0.6685 - val_accuracy: 0.8466 Learning Rate = 0.0013914012 Epoch 19/200 243/243 [==============================] - 683s 3s/step - loss: 0.7186 - accuracy: 0.8222 - val_loss: 0.6741 - val_accuracy: 0.8466 Learning Rate = 0.0013682699 Epoch 20/200 243/243 [==============================] - 663s 3s/step - loss: 0.6924 - accuracy: 0.8270 - val_loss: 0.6675 - val_accuracy: 0.8432 Learning Rate = 0.001345895 Epoch 21/200 243/243 [==============================] - 625s 3s/step - loss: 0.6844 - accuracy: 0.8286 - val_loss: 0.6431 - val_accuracy: 0.8489 Learning Rate = 0.0013242404 Epoch 22/200 243/243 [==============================] - 691s 3s/step - loss: 0.6445 - accuracy: 0.8402 - val_loss: 0.6374 - val_accuracy: 0.8524 Learning Rate = 0.0013032713 Epoch 23/200 243/243 [==============================] - 678s 3s/step - loss: 0.6202 - accuracy: 0.8445 - val_loss: 0.6077 - val_accuracy: 0.8602 Learning Rate = 0.001282956 Epoch 
24/200 243/243 [==============================] - 679s 3s/step - loss: 0.6029 - accuracy: 0.8494 - val_loss: 0.6237 - val_accuracy: 0.8536 Learning Rate = 0.0012632643 Epoch 25/200 243/243 [==============================] - 668s 3s/step - loss: 0.5989 - accuracy: 0.8508 - val_loss: 0.6153 - val_accuracy: 0.8563 Learning Rate = 0.000622084 Epoch 26/200 243/243 [==============================] - 671s 3s/step - loss: 0.5871 - accuracy: 0.8523 - val_loss: 0.5347 - val_accuracy: 0.8784 Learning Rate = 0.00061282027 Epoch 27/200 243/243 [==============================] - 671s 3s/step - loss: 0.4688 - accuracy: 0.8876 - val_loss: 0.5192 - val_accuracy: 0.8879 Learning Rate = 0.0006038283 Epoch 28/200 243/243 [==============================] - 672s 3s/step - loss: 0.4324 - accuracy: 0.8967 - val_loss: 0.5051 - val_accuracy: 0.8906 Learning Rate = 0.0005950965 Epoch 29/200 243/243 [==============================] - 673s 3s/step - loss: 0.3976 - accuracy: 0.9031 - val_loss: 0.4861 - val_accuracy: 0.8920 Learning Rate = 0.0005866135 Epoch 30/200 243/243 [==============================] - 661s 3s/step - loss: 0.3854 - accuracy: 0.9062 - val_loss: 0.4971 - val_accuracy: 0.8911 Learning Rate = 0.000578369 Epoch 31/200 243/243 [==============================] - 686s 3s/step - loss: 0.3683 - accuracy: 0.9109 - val_loss: 0.4875 - val_accuracy: 0.8925 Learning Rate = 0.00028517653 Epoch 32/200 243/243 [==============================] - 675s 3s/step - loss: 0.3638 - accuracy: 0.9109 - val_loss: 0.4771 - val_accuracy: 0.8999 Learning Rate = 0.00028127813 Epoch 33/200 243/243 [==============================] - 678s 3s/step - loss: 0.3223 - accuracy: 0.9229 - val_loss: 0.4467 - val_accuracy: 0.9061 Learning Rate = 0.0002774849 Epoch 34/200 243/243 [==============================] - 652s 3s/step - loss: 0.3012 - accuracy: 0.9289 - val_loss: 0.4432 - val_accuracy: 0.9093 Learning Rate = 0.0002737926 Epoch 35/200 243/243 [==============================] - 627s 3s/step - loss: 0.2901 - 
accuracy: 0.9324 - val_loss: 0.4465 - val_accuracy: 0.9062 Learning Rate = 0.00027019726 Epoch 36/200 243/243 [==============================] - 664s 3s/step - loss: 0.2842 - accuracy: 0.9327 - val_loss: 0.4545 - val_accuracy: 0.9053 Learning Rate = 0.00013334757 Epoch 37/200 243/243 [==============================] - 663s 3s/step - loss: 0.2943 - accuracy: 0.9297 - val_loss: 0.4367 - val_accuracy: 0.9090 Learning Rate = 0.00013164131 Epoch 38/200 243/243 [==============================] - 675s 3s/step - loss: 0.2694 - accuracy: 0.9365 - val_loss: 0.4435 - val_accuracy: 0.9090 Learning Rate = 0.00012997817 Epoch 39/200 243/243 [==============================] - 650s 3s/step - loss: 0.2534 - accuracy: 0.9416 - val_loss: 0.4394 - val_accuracy: 0.9116 Learning Rate = 6.417826e-05 Epoch 40/200 243/243 [==============================] - 630s 3s/step - loss: 0.2499 - accuracy: 0.9429 - val_loss: 0.4324 - val_accuracy: 0.9122 Learning Rate = 6.338743e-05 Epoch 41/200 243/243 [==============================] - 638s 3s/step - loss: 0.2481 - accuracy: 0.9427 - val_loss: 0.4325 - val_accuracy: 0.9138 Learning Rate = 6.261584e-05 Epoch 42/200 243/243 [==============================] - 630s 3s/step - loss: 0.2486 - accuracy: 0.9423 - val_loss: 0.4323 - val_accuracy: 0.9127 Learning Rate = 6.186282e-05 Epoch 43/200 243/243 [==============================] - 667s 3s/step - loss: 0.2452 - accuracy: 0.9441 - val_loss: 0.4285 - val_accuracy: 0.9134 Learning Rate = 6.1127685e-05 Epoch 44/200 243/243 [==============================] - 668s 3s/step - loss: 0.2432 - accuracy: 0.9449 - val_loss: 0.4311 - val_accuracy: 0.9133 Learning Rate = 6.040982e-05 Epoch 45/200 243/243 [==============================] - 634s 3s/step - loss: 0.2378 - accuracy: 0.9461 - val_loss: 0.4336 - val_accuracy: 0.9128 Learning Rate = 2.9854311e-05 Epoch 46/200 243/243 [==============================] - 638s 3s/step - loss: 0.2361 - accuracy: 0.9444 - val_loss: 0.4242 - val_accuracy: 0.9153 Learning Rate = 
2.9511759e-05 Epoch 47/200 243/243 [==============================] - 641s 3s/step - loss: 0.2354 - accuracy: 0.9450 - val_loss: 0.4278 - val_accuracy: 0.9154 Learning Rate = 2.917698e-05 Epoch 48/200 243/243 [==============================] - 652s 3s/step - loss: 0.2288 - accuracy: 0.9477 - val_loss: 0.4288 - val_accuracy: 0.9155 Learning Rate = 1.4424853e-05 Epoch 49/200 243/243 [==============================] - 623s 3s/step - loss: 0.2324 - accuracy: 0.9468 - val_loss: 0.4269 - val_accuracy: 0.9153 Learning Rate = 1.4264847e-05 Epoch 50/200 243/243 [==============================] - 625s 3s/step - loss: 0.2309 - accuracy: 0.9476 - val_loss: 0.4236 - val_accuracy: 0.9155 Learning Rate = 1.4108354e-05
# Plot the loss/accuracy curves recorded during the fine-tuning phase.
pd.DataFrame(history_fine.history).plot()
<AxesSubplot:>
# Evaluate the fine-tuned model on the held-out test set.
model.evaluate(test_generator)
20698/20698 [==============================] - 336s 16ms/step - loss: 0.4140 - accuracy: 0.9140
[0.41399112343788147, 0.9139530658721924]
# Visual sanity check: a 4x4 grid of test images, titled green when the
# prediction matches the true class and red (with both names) otherwise.
columns = 4
rows = 4
fig = plt.figure(figsize=(40, 40))
# Map sparse class index -> class-directory name for readable titles.
class_descriptions = list(test_generator.class_indices.keys())
# Iterate in rows and columns
for i in range(1, columns * rows + 1):
    # reserve a spot for the image
    fig.add_subplot(rows, columns, i)
    # test_generator uses batch_size=1: `img` is a single-image batch and the
    # label array holds exactly one sparse class index.
    # FIX: use the iterator protocol instead of the deprecated .next() method.
    img, label_batch = next(test_generator)
    # FIX: extract plain int labels instead of calling int()/== on 1-element
    # arrays (deprecated in NumPy and fragile).
    true_label = int(label_batch[0])
    # makes the prediction using the image
    predicted_label = int(np.argmax(model.predict(img), axis=-1)[0])
    # get a description for the prediction
    prediction_description = class_descriptions[predicted_label]
    # get a description for the true value
    true_description = class_descriptions[true_label]
    # set the title according to the prediction
    if true_label == predicted_label:
        plt.title("It is a {}".format(prediction_description), color='green')
    else:
        plt.title("Predicted: {} \n What really is: {}".format(prediction_description, true_description), color='red')
    # plot the image
    plt.imshow(img[0])
# show the plot
plt.show()
# Collect true and predicted labels over the entire test set for sklearn
# metrics. Restart the generator so iteration begins at the first image.
test_generator.reset()
true_labels, predicted_labels = [], []
total_imgs = len(test_generator.filenames)
for idx in range(1, total_imgs + 1):
    # batch_size=1: one image per draw, with a 1-element sparse-label array.
    # FIX: use the iterator protocol instead of the deprecated .next() method.
    img, label_batch = next(test_generator)
    # FIX: store plain ints (the original appended 1-element ndarrays,
    # making true_labels inconsistent with the int predicted_labels).
    true_labels.append(int(label_batch[0]))
    predicted_labels.append(int(np.argmax(model.predict(img), axis=-1)[0]))
    # Lightweight progress indicator: one dot per 10 images.
    if idx % 10 == 0:
        print('.', end='')
................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................
.....................................................................
from sklearn.metrics import classification_report, confusion_matrix
# Per-class precision/recall/F1 over the whole test set; target_names maps
# sparse class indices back to the class-directory names.
print(classification_report(true_labels, predicted_labels, target_names=class_descriptions))
precision recall f1-score support
2008 0.98 0.96 0.97 125
206 0.92 0.94 0.93 167
207 0.96 0.97 0.96 198
208 0.98 0.96 0.97 200
307 0.97 0.89 0.93 124
308 0.97 0.95 0.96 119
320I 0.85 0.94 0.89 135
408 0.95 0.96 0.96 80
500 0.97 1.00 0.99 74
A3 0.86 0.87 0.87 202
A4 0.76 0.71 0.73 78
AGILE 0.94 0.96 0.95 198
AIRCROSS 0.91 0.98 0.94 156
AMAROK 0.98 0.97 0.98 188
ARGO 0.98 0.97 0.98 191
ASTRA 0.95 0.92 0.94 221
ASX 0.95 0.96 0.96 107
BRAVO 0.92 0.93 0.92 71
C-180 0.91 0.95 0.93 152
C3 0.94 0.94 0.94 217
C4 0.93 0.91 0.92 188
C4 CACTUS 0.97 0.95 0.96 65
CAPTIVA 0.96 0.96 0.96 91
CAPTUR 1.00 1.00 1.00 202
CELTA 0.88 0.89 0.89 177
CERATO 0.82 0.87 0.84 75
CITY 0.93 0.94 0.94 208
CIVIC 0.89 0.89 0.89 196
CLASSIC 0.82 0.86 0.84 218
CLIO 0.94 0.92 0.93 188
COBALT 0.97 0.97 0.97 192
COMPASS 0.99 0.98 0.99 192
COOPER 0.96 1.00 0.98 77
COROLLA 0.90 0.89 0.90 191
CORSA 0.77 0.75 0.76 205
CR-V 0.98 0.97 0.98 131
CRETA 0.95 0.99 0.97 159
CRONOS 0.89 0.94 0.92 71
CROSSFOX 0.94 0.94 0.94 156
CRUZE 0.97 0.92 0.94 216
DOBLO 0.99 0.98 0.98 211
DUCATO 0.92 0.94 0.93 65
DUSTER 0.99 0.97 0.98 205
ECOSPORT 0.93 0.97 0.95 192
ESCORT 0.82 0.67 0.74 69
ETIOS 0.97 0.96 0.96 197
FIESTA 0.94 0.93 0.93 201
FIORINO 0.97 0.94 0.95 189
FIT 0.92 0.97 0.94 186
FLUENCE 1.00 0.91 0.95 101
FOCUS 0.93 0.95 0.94 190
FOX 0.90 0.79 0.84 206
FRONTIER 0.97 0.91 0.94 159
FUSCA 0.95 0.99 0.97 212
FUSION 0.94 0.96 0.95 200
GOL 0.60 0.66 0.63 192
GOLF 0.87 0.89 0.88 209
GRAND SIENA 0.91 0.96 0.94 208
HB20 0.85 0.78 0.81 209
HB20S 0.79 0.86 0.82 187
HB20X 0.97 0.89 0.93 75
HILUX 0.97 0.94 0.95 179
HR-V 0.96 0.99 0.97 218
I30 0.93 0.92 0.92 153
IDEA 0.96 0.99 0.97 179
IX35 0.94 0.97 0.95 165
JETTA 0.93 0.89 0.91 200
JOY 0.75 0.82 0.79 190
KA 0.85 0.84 0.85 200
KA+ 0.88 0.87 0.88 139
KICKS 1.00 0.98 0.99 185
KOMBI 0.98 0.98 0.98 108
KWID 1.00 0.99 0.99 230
L200 0.89 0.92 0.90 229
LINEA 0.99 0.91 0.95 78
LIVINA 0.93 0.91 0.92 75
LOGAN 0.86 0.95 0.90 189
MARCH 0.96 0.99 0.97 166
MASTER 0.96 0.95 0.96 85
MEGANE 0.93 0.94 0.94 143
MERIVA 0.98 0.94 0.96 138
MOBI 1.00 0.99 0.99 224
MONTANA 0.95 0.91 0.93 173
MONZA 0.81 0.85 0.83 60
NIVUS 0.97 0.99 0.98 84
ONIX 0.63 0.56 0.59 209
OPALA 0.90 0.90 0.90 79
OUTLANDER 0.89 0.92 0.90 121
PAJERO 0.90 0.94 0.92 188
PALIO 0.81 0.78 0.80 195
PARATI 0.72 0.83 0.77 162
PASSAT 0.82 0.76 0.79 90
PICANTO 0.98 0.96 0.97 92
POLO 0.76 0.81 0.78 188
PRISMA 0.79 0.70 0.74 192
PUNTO 0.96 0.96 0.96 191
Q3 0.94 0.94 0.94 79
RANGE ROVER 0.98 0.95 0.96 185
RANGER 0.91 0.93 0.92 193
RENEGADE 1.00 0.99 1.00 187
S10 0.95 0.91 0.93 211
SANDERO 0.92 0.88 0.90 205
SANTA FE 0.96 0.83 0.89 89
SANTANA 0.72 0.84 0.77 92
SAVEIRO 0.84 0.85 0.84 208
SENTRA 0.90 0.95 0.92 161
SIENA 0.75 0.71 0.73 182
SORENTO 0.95 0.89 0.92 82
SPACEFOX 0.75 0.92 0.82 130
SPIN 0.99 0.96 0.97 213
SPORTAGE 0.96 0.91 0.94 179
SPRINTER 0.96 0.96 0.96 121
STRADA 0.96 0.93 0.94 193
T-4 0.97 1.00 0.99 68
T-CROSS 0.97 1.00 0.98 222
TIGUAN 0.93 0.96 0.94 139
TORO 0.99 0.99 0.99 202
TRACKER 0.96 0.96 0.96 186
TUCSON 0.96 0.96 0.96 192
UNO 0.88 0.95 0.91 206
UP! 0.99 0.98 0.98 215
VECTRA 0.87 0.88 0.88 199
VERSA 0.97 0.95 0.96 205
VIRTUS 0.91 0.82 0.86 205
VOYAGE 0.76 0.70 0.73 207
X1 0.89 0.97 0.93 96
XSARA 0.95 0.98 0.97 100
YARIS 0.99 0.92 0.95 84
ZAFIRA 0.96 0.92 0.94 101
accuracy 0.91 20698
macro avg 0.92 0.91 0.91 20698
weighted avg 0.91 0.91 0.91 20698
# Persist the fine-tuned model in Keras HDF5 format.
# NOTE(review): "percert" in the filename looks like a typo for "percent" —
# left unchanged so any existing references to the saved file keep working.
model.save('models/car_classifier_resnet50v2_129types_91percert.h5')